From a97227a56dcce97c08057130b028c82917a6bbc0 Mon Sep 17 00:00:00 2001
From: "kfraser@ubuntu.eng.hq.xensource.com" <kfraser@ubuntu.eng.hq.xensource.com>
Date: Tue, 5 Sep 2006 18:28:27 -0700
Subject: [PATCH] [XEN] Fix bitops inline asm to specify that memory is
 clobbered.

Necessary because the modified word may not be the one directly
addressed by the memory parameter (since the parameter actually points
at an array, not a scalar value).

The change to set_bit/__set_bit is reported to fix an issue seen on
x460 hardware; Ryan Harper reported the issue and suggested this fix.

Signed-off-by: Keir Fraser <keir@xensource.com>
---
 xen/include/asm-x86/bitops.h | 57 ++++++++++++++++++------------------
 1 file changed, 28 insertions(+), 29 deletions(-)

diff --git a/xen/include/asm-x86/bitops.h b/xen/include/asm-x86/bitops.h
index b9fd2557d0..14d7e1451b 100644
--- a/xen/include/asm-x86/bitops.h
+++ b/xen/include/asm-x86/bitops.h
@@ -7,20 +7,19 @@
 
 #include <xen/config.h>
 
-/*
- * These have to be done with inline assembly: that way the bit-setting
- * is guaranteed to be atomic. All bit operations return 0 if the bit
- * was cleared before the operation and != 0 if it was not.
- *
- * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
- */
-
 #ifdef CONFIG_SMP
 #define LOCK_PREFIX "lock ; "
 #else
 #define LOCK_PREFIX ""
 #endif
 
+/*
+ * We use the "+m" constraint because the memory operand is both read from
+ * and written to. Since the operand is in fact a word array, we also
+ * specify "memory" in the clobbers list to indicate that words other than
+ * the one directly addressed by the memory operand may be modified.
+ */
+
 #define ADDR (*(volatile long *) addr)
 
 /**
@@ -37,8 +36,8 @@ static __inline__ void set_bit(int nr, volatile void * addr)
 {
     __asm__ __volatile__( LOCK_PREFIX
         "btsl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 
 /**
@@ -54,8 +53,8 @@ static __inline__ void __set_bit(int nr, volatile void * addr)
 {
     __asm__(
        "btsl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 
 /**
@@ -72,8 +71,8 @@ static __inline__ void clear_bit(int nr, volatile void * addr)
 {
     __asm__ __volatile__( LOCK_PREFIX
         "btrl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 
 /**
@@ -89,8 +88,8 @@ static __inline__ void __clear_bit(int nr, volatile void * addr)
 {
     __asm__(
         "btrl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 #define smp_mb__before_clear_bit() barrier()
 /**
@@ -109,8 +108,8 @@ static __inline__ void __change_bit(int nr, volatile void * addr)
 {
     __asm__ __volatile__(
         "btcl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 
 /**
@@ -126,8 +125,8 @@ static __inline__ void change_bit(int nr, volatile void * addr)
 {
     __asm__ __volatile__( LOCK_PREFIX
         "btcl %1,%0"
-        :"=m" (ADDR)
-        :"dIr" (nr));
+        :"+m" (ADDR)
+        :"dIr" (nr) : "memory");
 }
 
 /**
@@ -144,7 +143,7 @@ static __inline__ int test_and_set_bit(int nr, volatile void * addr)
 
     __asm__ __volatile__( LOCK_PREFIX
         "btsl %2,%1\n\tsbbl %0,%0"
-        :"=r" (oldbit),"=m" (ADDR)
+        :"=r" (oldbit),"+m" (ADDR)
         :"dIr" (nr) : "memory");
     return oldbit;
 }
@@ -164,8 +163,8 @@ static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
 
     __asm__(
         "btsl %2,%1\n\tsbbl %0,%0"
-        :"=r" (oldbit),"=m" (ADDR)
-        :"dIr" (nr));
+        :"=r" (oldbit),"+m" (ADDR)
+        :"dIr" (nr) : "memory");
     return oldbit;
 }
 
@@ -183,7 +182,7 @@ static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
 
     __asm__ __volatile__( LOCK_PREFIX
         "btrl %2,%1\n\tsbbl %0,%0"
-        :"=r" (oldbit),"=m" (ADDR)
+        :"=r" (oldbit),"+m" (ADDR)
:"dIr" (nr) : "memory"); return oldbit; } @@ -203,8 +202,8 @@ static __inline__ int __test_and_clear_bit(int nr, volatile void * addr) __asm__( "btrl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) - :"dIr" (nr)); + :"=r" (oldbit),"+m" (ADDR) + :"dIr" (nr) : "memory"); return oldbit; } @@ -215,7 +214,7 @@ static __inline__ int __test_and_change_bit(int nr, volatile void * addr) __asm__ __volatile__( "btcl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -234,7 +233,7 @@ static __inline__ int test_and_change_bit(int nr, volatile void * addr) __asm__ __volatile__( LOCK_PREFIX "btcl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -242,7 +241,7 @@ static __inline__ int test_and_change_bit(int nr, volatile void * addr) static __inline__ int constant_test_bit(int nr, const volatile void * addr) { - return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0; + return ((1U << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0; } static __inline__ int variable_test_bit(int nr, volatile void * addr) -- 2.30.2